From 5430db61f6c2668ec94945c91a1df22015ff1cf8 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 3 Feb 2016 11:04:07 -0800 Subject: [PATCH] Add sha256 checksums to the lockfile This commit changes how lock files are encoded by adding checksums for each package in the lockfile to the `[metadata]` section. The previous commit implemented the ability to redirect sources, but the core assumption there was that a package coming from two different locations was always the same. An inevitable case, however, is that a source gets corrupted or, worse, ships a modified version of a crate to introduce instability between two "mirrors". The purpose of adding checksums will be to resolve this discrepancy. Each crate coming from crates.io will now record its sha256 checksum in the lock file. When a lock file already exists, the new checksum for a crate will be checked against it, and if they differ compilation will be aborted. Currently only registry crates will have sha256 checksums listed, all other sources do not have checksums at this time. The astute may notice that if the lock file format is changing, then a lock file generated by a newer Cargo might be mangled by an older Cargo. In anticipation of this, however, all Cargo versions published support a `[metadata]` section of the lock file which is transparently carried forward if encountered. This means that older Cargos compiling with a newer lock file will not verify checksums in the lock file, but they will carry forward the checksum information and prevent it from being removed. There are, however, a few situations where problems may still arise: 1. If an older Cargo takes a newer lockfile (with checksums) and updates it with a modified `Cargo.toml` (e.g. a package was added, removed, or updated), then the `[metadata]` section will not be updated appropriately. This modification would require a newer Cargo to come in and update the checksums for such a modification. 2. 
Today Cargo can only calculate checksums for registry sources, but we may eventually want to support other sources like git (or just straight-up path sources). If future Cargo implements support for this sort of checksum, then it's the same problem as above where older Cargos will not know how to keep the checksum in sync --- src/cargo/core/registry.rs | 8 + src/cargo/core/resolver/encode.rs | 110 +++++++++-- src/cargo/core/resolver/mod.rs | 152 +++++++++++---- src/cargo/core/summary.rs | 10 + src/cargo/ops/resolve.rs | 2 +- src/cargo/sources/config.rs | 24 ++- src/cargo/sources/registry.rs | 8 +- tests/cargotest/support/registry.rs | 2 +- tests/lockfile-compat.rs | 278 ++++++++++++++++++++++++++++ tests/registry.rs | 35 +++- 10 files changed, 564 insertions(+), 65 deletions(-) create mode 100644 tests/lockfile-compat.rs diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 6e7d1bf8f..90a1b76d2 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -11,6 +11,14 @@ use sources::config::SourceConfigMap; pub trait Registry { /// Attempt to find the packages that match a dependency request. fn query(&mut self, name: &Dependency) -> CargoResult>; + + /// Returns whether or not this registry will return summaries with + /// checksums listed. + /// + /// By default, registries do not support checksums. 
+ fn supports_checksums(&self) -> bool { + false + } } impl Registry for Vec { diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 31bc22b7a..8771a9532 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -1,10 +1,12 @@ use std::collections::{HashMap, BTreeMap}; +use std::fmt; +use std::str::FromStr; use regex::Regex; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use core::{Package, PackageId, SourceId, Workspace}; -use util::{CargoResult, Graph, Config}; +use util::{CargoResult, Graph, Config, internal, ChainError, CargoError}; use super::Resolve; @@ -18,7 +20,7 @@ pub struct EncodableResolve { pub type Metadata = BTreeMap; impl EncodableResolve { - pub fn to_resolve(&self, ws: &Workspace) -> CargoResult { + pub fn to_resolve(self, ws: &Workspace) -> CargoResult { let path_deps = build_path_deps(ws); let default = try!(ws.current()).package_id().source_id(); @@ -90,13 +92,56 @@ impl EncodableResolve { try!(add_dependencies(id, pkg)); } } + let mut metadata = self.metadata.unwrap_or(BTreeMap::new()); + + // Parse out all package checksums. After we do this we can be in a few + // situations: + // + // * We parsed no checksums. In this situation we're dealing with an old + // lock file and we're gonna fill them all in. + // * We parsed some checksums, but not one for all packages listed. It + // could have been the case that some were listed, then an older Cargo + // client added more dependencies, and now we're going to fill in the + // missing ones. + // * There are too many checksums listed, indicative of an older Cargo + // client removing a package but not updating the checksums listed. + // + // In all of these situations they're part of normal usage, so we don't + // really worry about it. We just try to slurp up as many checksums as + // possible. 
+ let mut checksums = HashMap::new(); + let prefix = "checksum "; + let mut to_remove = Vec::new(); + for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { + to_remove.push(k.to_string()); + let k = &k[prefix.len()..]; + let id: EncodablePackageId = try!(k.parse().chain_error(|| { + internal("invalid encoding of checksum in lockfile") + })); + let id = try!(to_package_id(&id.name, + &id.version, + id.source.as_ref(), + default, + &path_deps)); + let v = if v == "" { + None + } else { + Some(v.to_string()) + }; + checksums.insert(id, v); + } + + for k in to_remove { + metadata.remove(&k); + } Ok(Resolve { graph: g, root: root, features: HashMap::new(), - metadata: self.metadata.clone(), replacements: replacements, + checksums: checksums, + metadata: metadata, }) } } @@ -168,33 +213,30 @@ pub struct EncodablePackageId { source: Option } -impl Encodable for EncodablePackageId { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let mut out = format!("{} {}", self.name, self.version); +impl fmt::Display for EncodablePackageId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + try!(write!(f, "{} {}", self.name, self.version)); if let Some(ref s) = self.source { - out.push_str(&format!(" ({})", s.to_url())); + try!(write!(f, " ({})", s.to_url())); } - out.encode(s) + Ok(()) } } -impl Decodable for EncodablePackageId { - fn decode(d: &mut D) -> Result { - let string: String = try!(Decodable::decode(d)); +impl FromStr for EncodablePackageId { + type Err = Box; + + fn from_str(s: &str) -> CargoResult { let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); - let captures = try!(regex.captures(&string).ok_or_else(|| { - d.error("invalid serialized PackageId") + let captures = try!(regex.captures(s).ok_or_else(|| { + internal("invalid serialized PackageId") })); let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); let source_id = match captures.at(3) { - Some(s) => { - 
Some(try!(SourceId::from_url(s).map_err(|e| { - d.error(&e.to_string()) - }))) - } + Some(s) => Some(try!(SourceId::from_url(s))), None => None, }; @@ -206,6 +248,21 @@ impl Decodable for EncodablePackageId { } } +impl Encodable for EncodablePackageId { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.to_string().encode(s) + } +} + +impl Decodable for EncodablePackageId { + fn decode(d: &mut D) -> Result { + String::decode(d).and_then(|string| { + string.parse::() + .map_err(|e| d.error(&e.to_string())) + }) + } +} + pub struct WorkspaceResolve<'a, 'cfg: 'a> { pub ws: &'a Workspace<'cfg>, pub resolve: &'a Resolve, @@ -226,12 +283,25 @@ impl<'a, 'cfg> Encodable for WorkspaceResolve<'a, 'cfg> { } Some(encodable_resolve_node(id, self.resolve)) - }).collect::>(); + }).collect::>(); + + let mut metadata = self.resolve.metadata.clone(); + + for id in ids.iter().filter(|id| !id.source_id().is_path()) { + let checksum = match self.resolve.checksums[*id] { + Some(ref s) => &s[..], + None => "", + }; + let id = encodable_package_id(id); + metadata.insert(format!("checksum {}", id.to_string()), + checksum.to_string()); + } + let metadata = if metadata.len() == 0 {None} else {Some(metadata)}; EncodableResolve { package: Some(encodable), root: encodable_resolve_node(&root, self.resolve), - metadata: self.resolve.metadata.clone(), + metadata: metadata, }.encode(s) } } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 2ecbbd0af..5f91ed735 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -46,7 +46,7 @@ //! over the place. 
use std::cmp::Ordering; -use std::collections::{HashSet, HashMap, BinaryHeap}; +use std::collections::{HashSet, HashMap, BinaryHeap, BTreeMap}; use std::fmt; use std::ops::Range; use std::rc::Rc; @@ -75,8 +75,9 @@ pub struct Resolve { graph: Graph, replacements: HashMap, features: HashMap>, + checksums: HashMap>, root: PackageId, - metadata: Option, + metadata: Metadata, } pub struct Deps<'a> { @@ -115,27 +116,93 @@ struct Candidate { } impl Resolve { - fn new(root: PackageId) -> Resolve { - let mut g = Graph::new(); - g.add(root.clone(), &[]); - Resolve { - graph: g, - root: root, - replacements: HashMap::new(), - features: HashMap::new(), - metadata: None, + pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { + // Given a previous instance of resolve, it should be forbidden to ever + // have checksums which *differ*. If the same package id has differing + // checksums, then something has gone wrong such as: + // + // * Something got seriously corrupted + // * A "mirror" isn't actually a mirror as some changes were made + // * A replacement source wasn't actually a replacement, some changes + // were made + // + // In all of these cases, we want to report an error to indicate that + // something is awry. Normal execution (esp just using crates.io) should + // never run into this. + for (id, cksum) in previous.checksums.iter() { + if let Some(mine) = self.checksums.get(id) { + if mine == cksum { + continue + } + + // If the previous checksum wasn't calculated, the current + // checksum is `Some`. This may indicate that a source was + // erroneously replaced or was replaced with something that + // desires stronger checksum guarantees than can be afforded + // elsewhere. 
+ if cksum.is_none() { + bail!("\ +checksum for `{}` was not previously calculated, but a checksum could now \ +be calculated + +this could be indicative of a few possible situations: + + * the source `{}` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt +", id, id.source_id()) + + // If our checksum hasn't been calculated, then it could mean + // that future Cargo figured out how to checksum something or + // more realistically we were overridden with a source that does + // not have checksums. + } else if mine.is_none() { + bail!("\ +checksum for `{}` could not be calculated, but a checksum is listed in \ +the existing lock file + +this could be indicative of a few possible situations: + + * the source `{}` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `{0}` was the same as before in either situation +", id, id.source_id()) + + // If the checksums aren't equal, and neither is None, then they + // must both be Some, in which case the checksum now differs. + // That's quite bad! + } else { + bail!("\ +checksum for `{}` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `{0}` was the same as before in any situation +", id); + } + } } - } - pub fn copy_metadata(&mut self, other: &Resolve) { - self.metadata = other.metadata.clone(); + // Be sure to just copy over any unknown metadata. 
+ self.metadata = previous.metadata.clone(); + Ok(()) } pub fn iter(&self) -> Nodes { self.graph.iter() } - pub fn root(&self) -> &PackageId { &self.root } + pub fn root(&self) -> &PackageId { + &self.root + } pub fn deps(&self, pkg: &PackageId) -> Deps { Deps { edges: self.graph.edges(pkg), resolve: self } @@ -194,7 +261,9 @@ impl<'a> Iterator for DepsNotReplaced<'a> { #[derive(Clone)] struct Context<'a> { activations: HashMap<(String, SourceId), Vec>>, - resolve: Resolve, + resolve_graph: Graph, + resolve_features: HashMap>, + resolve_replacements: HashMap, replacements: &'a [(PackageIdSpec, Dependency)], } @@ -204,14 +273,33 @@ pub fn resolve(root: &PackageId, replacements: &[(PackageIdSpec, Dependency)], registry: &mut Registry) -> CargoResult { let cx = Context { - resolve: Resolve::new(root.clone()), + resolve_graph: Graph::new(), + resolve_features: HashMap::new(), + resolve_replacements: HashMap::new(), activations: HashMap::new(), replacements: replacements, }; let _p = profile::start(format!("resolving: {}", root)); let cx = try!(activate_deps_loop(cx, registry, summaries)); - try!(check_cycles(&cx)); - Ok(cx.resolve) + + let mut resolve = Resolve { + graph: cx.resolve_graph, + features: cx.resolve_features, + root: root.clone(), + checksums: HashMap::new(), + metadata: BTreeMap::new(), + replacements: cx.resolve_replacements, + }; + + for summary in cx.activations.values().flat_map(|v| v.iter()) { + let cksum = summary.checksum().map(|s| s.to_string()); + resolve.checksums.insert(summary.package_id().clone(), cksum); + } + + try!(check_cycles(&resolve, &cx.activations)); + + trace!("resolved: {:?}", resolve); + Ok(resolve) } /// Attempts to activate the summary `candidate` in the context `cx`. 
@@ -227,7 +315,7 @@ fn activate(cx: &mut Context, method: &Method) -> CargoResult> { if let Some(parent) = parent { - cx.resolve.graph.link(parent.package_id().clone(), + cx.resolve_graph.link(parent.package_id().clone(), candidate.summary.package_id().clone()); } @@ -237,7 +325,7 @@ fn activate(cx: &mut Context, let candidate = match candidate.replace { Some(replace) => { - cx.resolve.replacements.insert(candidate.summary.package_id().clone(), + cx.resolve_replacements.insert(candidate.summary.package_id().clone(), replace.package_id().clone()); if cx.flag_activated(&replace, method) { return Ok(None); @@ -480,7 +568,7 @@ fn activate_deps_loop<'a>(mut cx: Context<'a>, remaining_deps.extend(try!(activate(&mut cx, registry, Some(&parent), candidate, &method))); } - trace!("resolved: {:?}", cx.resolve); + Ok(cx) } @@ -523,8 +611,8 @@ fn activation_error(cx: &Context, dep.name(), parent.name(), dep.name()); 'outer: for v in prev_active.iter() { - for node in cx.resolve.graph.iter() { - let edges = match cx.resolve.graph.edges(node) { + for node in cx.resolve_graph.iter() { + let edges = match cx.resolve_graph.edges(node) { Some(edges) => edges, None => continue, }; @@ -709,7 +797,7 @@ impl<'a> Context<'a> { let key = (id.name().to_string(), id.source_id().clone()); let prev = self.activations.entry(key).or_insert(Vec::new()); if !prev.iter().any(|c| c == summary) { - self.resolve.graph.add(id.clone(), &[]); + self.resolve_graph.add(id.clone(), &[]); prev.push(summary.clone()); return false } @@ -722,7 +810,7 @@ impl<'a> Context<'a> { }; let has_default_feature = summary.features().contains_key("default"); - match self.resolve.features(id) { + match self.resolve_features.get(id) { Some(prev) => { features.iter().all(|f| prev.contains(f)) && (!use_default || prev.contains("default") || @@ -880,7 +968,7 @@ impl<'a> Context<'a> { // Record what list of features is active for this package. 
if !used_features.is_empty() { let pkgid = candidate.package_id(); - self.resolve.features.entry(pkgid.clone()) + self.resolve_features.entry(pkgid.clone()) .or_insert(HashSet::new()) .extend(used_features); } @@ -889,13 +977,15 @@ impl<'a> Context<'a> { } } -fn check_cycles(cx: &Context) -> CargoResult<()> { +fn check_cycles(resolve: &Resolve, + activations: &HashMap<(String, SourceId), Vec>>) + -> CargoResult<()> { let mut summaries = HashMap::new(); - for summary in cx.activations.values().flat_map(|v| v) { + for summary in activations.values().flat_map(|v| v) { summaries.insert(summary.package_id(), &**summary); } - return visit(&cx.resolve, - cx.resolve.root(), + return visit(resolve, + resolve.root(), &summaries, &mut HashSet::new(), &mut HashSet::new()); diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index 81169c54d..219e21bca 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -15,6 +15,7 @@ pub struct Summary { package_id: PackageId, dependencies: Vec, features: HashMap>, + checksum: Option, } impl Summary { @@ -60,6 +61,7 @@ impl Summary { package_id: pkg_id, dependencies: dependencies, features: features, + checksum: None, }) } @@ -69,12 +71,20 @@ impl Summary { pub fn source_id(&self) -> &SourceId { self.package_id.source_id() } pub fn dependencies(&self) -> &[Dependency] { &self.dependencies } pub fn features(&self) -> &HashMap> { &self.features } + pub fn checksum(&self) -> Option<&str> { + self.checksum.as_ref().map(|s| &s[..]) + } pub fn override_id(mut self, id: PackageId) -> Summary { self.package_id = id; self } + pub fn set_checksum(mut self, cksum: String) -> Summary { + self.checksum = Some(cksum); + self + } + pub fn map_dependencies(mut self, f: F) -> Summary where F: FnMut(Dependency) -> Dependency { let deps = mem::replace(&mut self.dependencies, Vec::new()); diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 7a6abacab..10fb4aa87 100644 --- a/src/cargo/ops/resolve.rs +++ 
b/src/cargo/ops/resolve.rs @@ -153,7 +153,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, &replace, registry)); if let Some(previous) = previous { - resolved.copy_metadata(previous); + try!(resolved.merge_from(previous)); } return Ok(resolved); diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs index b0e88f1eb..0cde9e5e1 100644 --- a/src/cargo/sources/config.rs +++ b/src/cargo/sources/config.rs @@ -70,6 +70,7 @@ impl<'cfg> SourceConfigMap<'cfg> { }; let mut path = Path::new("/"); let orig_name = name; + let new_id; loop { let cfg = match self.cfgs.get(name) { Some(cfg) => cfg, @@ -85,10 +86,9 @@ impl<'cfg> SourceConfigMap<'cfg> { } None if *id == cfg.id => return Ok(id.load(self.config)), None => { - let new_id = cfg.id.with_precise(id.precise() - .map(|s| s.to_string())); - let src = new_id.load(self.config); - return Ok(Box::new(ReplacedSource::new(id, &new_id, src))) + new_id = cfg.id.with_precise(id.precise() + .map(|s| s.to_string())); + break } } debug!("following pointer to {}", name); @@ -98,6 +98,22 @@ impl<'cfg> SourceConfigMap<'cfg> { (configuration in `{}`)", name, path.display()) } } + let new_src = new_id.load(self.config); + let old_src = id.load(self.config); + if new_src.supports_checksums() != old_src.supports_checksums() { + let (supports, no_support) = if new_src.supports_checksums() { + (name, orig_name) + } else { + (orig_name, name) + }; + bail!("\ +cannot replace `{orig}` with `{name}`, the source `{supports}` supports \ +checksums, but `{no_support}` does not + +a lock file compatible with `{orig}` cannot be generated in this situation +", orig = orig_name, name = name, supports = supports, no_support = no_support); + } + Ok(Box::new(ReplacedSource::new(id, &new_id, new_src))) } fn add(&mut self, name: &str, cfg: SourceConfig) { diff --git a/src/cargo/sources/registry.rs b/src/cargo/sources/registry.rs index 460285e41..287135c66 100644 --- a/src/cargo/sources/registry.rs +++ 
b/src/cargo/sources/registry.rs @@ -421,8 +421,10 @@ impl<'cfg> RegistrySource<'cfg> { self.parse_registry_dependency(dep) }).collect(); let deps = try!(deps); + let summary = try!(Summary::new(pkgid, deps, features)); + let summary = summary.set_checksum(cksum.clone()); self.hashes.insert((name, vers), cksum); - Ok((try!(Summary::new(pkgid, deps, features)), yanked.unwrap_or(false))) + Ok((summary, yanked.unwrap_or(false))) } /// Converts an encoded dependency in the registry to a cargo dependency @@ -536,6 +538,10 @@ impl<'cfg> Registry for RegistrySource<'cfg> { }); summaries.query(dep) } + + fn supports_checksums(&self) -> bool { + true + } } impl<'cfg> Source for RegistrySource<'cfg> { diff --git a/tests/cargotest/support/registry.rs b/tests/cargotest/support/registry.rs index dcf6dc6d3..26663e260 100644 --- a/tests/cargotest/support/registry.rs +++ b/tests/cargotest/support/registry.rs @@ -37,7 +37,7 @@ struct Dependency { features: Vec, } -fn init() { +pub fn init() { let config = paths::home().join(".cargo/config"); t!(fs::create_dir_all(config.parent().unwrap())); if fs::metadata(&config).is_ok() { diff --git a/tests/lockfile-compat.rs b/tests/lockfile-compat.rs new file mode 100644 index 000000000..64931dc04 --- /dev/null +++ b/tests/lockfile-compat.rs @@ -0,0 +1,278 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; + +use std::fs::File; +use std::io::prelude::*; + +use cargotest::support::git; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn oldest_lockfile_still_works() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + let lockfile = r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] 
+ +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + File::create(p.root().join("Cargo.lock")).unwrap() + .write_all(lockfile.as_bytes()).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock).unwrap(); + assert!(lock.starts_with(lockfile.trim())); +} + +#[test] +fn totally_wild_checksums_works() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + File::create(p.root().join("Cargo.lock")).unwrap().write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock).unwrap(); + assert!(lock.starts_with(r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"#.trim())); +} + +#[test] +fn wrong_checksum_is_an_error() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + 
             "#) + .file("src/lib.rs", ""); + p.build(); + + t!(t!(File::create(p.root().join("Cargo.lock"))).write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#)); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry `[..]` +error: checksum for `foo v0.1.0` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.1.0` was the same as before in any situation + +")); +} + +// If the checksum is unlisted in the lockfile (e.g. `<none>`) yet we can +// calculate it (e.g. it's a registry dep), then we should in theory just fill +// it in. 
+#[test] +fn unlisted_checksum_is_bad_if_we_calculate() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + t!(t!(File::create(p.root().join("Cargo.lock"))).write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "" +"#)); + + assert_that(p.cargo("fetch"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry `[..]` +error: checksum for `foo v0.1.0` was not previously calculated, but a checksum \ +could now be calculated + +this could be indicative of a few possible situations: + + * the source `[..]` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt + +")); +} + +// If the checksum is listed in the lockfile yet we cannot calculate it (e.g. +// git dependencies as of today), then make sure we choke. 
+#[test] +fn listed_checksum_bad_if_we_cannot_compute() { + let git = git::new("foo", |p| { + p.file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("bar") + .file("Cargo.toml", &format!(r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = {{ git = '{}' }} + "#, git.url())) + .file("src/lib.rs", ""); + p.build(); + + let lockfile = format!(r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (git+{0})" +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "git+{0}" + +[metadata] +"checksum foo 0.1.0 (git+{0})" = "checksum" +"#, git.url()); + File::create(p.root().join("Cargo.lock")).unwrap() + .write_all(lockfile.as_bytes()).unwrap(); + + assert_that(p.cargo("fetch"), + execs().with_status(101).with_stderr("\ +[UPDATING] git repository `[..]` +error: checksum for `foo v0.1.0 ([..])` could not be calculated, but a \ +checksum is listed in the existing lock file[..] + +this could be indicative of a few possible situations: + + * the source `[..]` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `foo v0.1.0 ([..])` was the same as before in either situation + +")); +} diff --git a/tests/registry.rs b/tests/registry.rs index 60ffdae75..325f42711 100644 --- a/tests/registry.rs +++ b/tests/registry.rs @@ -720,6 +720,8 @@ fn git_and_registry_dep() { #[test] fn update_publish_then_update() { + // First generate a Cargo.lock and a clone of the registry index at the + // "head" of the current registry. let p = project("foo") .file("Cargo.toml", r#" [project] @@ -736,16 +738,35 @@ fn update_publish_then_update() { assert_that(p.cargo("build"), execs().with_status(0)); + // Next, publish a new package and back up the copy of the registry we just + // created. 
Package::new("a", "0.1.1").publish(); + let registry = paths::home().join(".cargo/registry"); + let backup = paths::root().join("registry-backup"); + fs::rename(®istry, &backup).unwrap(); - let lock = p.root().join("Cargo.lock"); - let mut s = String::new(); - File::open(&lock).unwrap().read_to_string(&mut s).unwrap(); - File::create(&lock).unwrap() - .write_all(s.replace("0.1.0", "0.1.1").as_bytes()).unwrap(); - println!("second"); + // Generate a Cargo.lock with the newer version, and then move the old copy + // of the registry back into place. + let p2 = project("foo2") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.1" + "#) + .file("src/main.rs", "fn main() {}"); + assert_that(p2.cargo_process("build"), + execs().with_status(0)); + fs::remove_dir_all(®istry).unwrap(); + fs::rename(&backup, ®istry).unwrap(); + fs::rename(p2.root().join("Cargo.lock"), p.root().join("Cargo.lock")).unwrap(); - fs::remove_dir_all(&p.root().join("target")).unwrap(); + // Finally, build the first project again (with our newer Cargo.lock) which + // should force an update of the old registry, download the new crate, and + // then build everything again. assert_that(p.cargo("build"), execs().with_status(0).with_stderr(&format!("\ [UPDATING] [..] -- 2.30.2